In [1]:
%load_ext autoreload
%autoreload 2

In [5]:
import os
#import warnings
#warnings.simplefilter('ignore')
from operator import attrgetter
from platform import python_version_tuple

# platform.python_version_tuple() returns a tuple of *strings* (e.g. ('3', '11', '2')),
# so the original comparison `[0] == 3` was always False and `xrange` was never
# aliased on Python 3, causing a NameError in the loops below. Compare as int.
if int(python_version_tuple()[0]) >= 3:
    xrange = range

import scipy as sp
import scipy.ndimage
import numpy as np
import pandas as pd
import skimage
import skimage.measure
from PIL import Image
import matplotlib.pyplot as plt
%matplotlib inline
import cv2
import h5py
from tqdm import tqdm_notebook
from IPython.display import display
from extract_data import *

In [6]:
h5f = h5py.File(os.path.join(os.path.dirname(os.getcwd()), 'data', 'hdf5_datasets', 'all_data.hdf5'), 'r+')

Exploring data


In [3]:
X, Y, filenames = extract_DRIONS_DB(os.path.join(os.path.dirname(os.getcwd()), 'data', 'DRIONS-DB'), expert=1)
len(X), X[0].shape, len(Y), Y[0].shape, len(filenames)


Out[3]:
(110, (560, 560, 3), 110, (560, 560, 1), 110)

In [41]:
# Visual sanity check: show each image next to a copy with the expert disc
# annotation overlaid (masked pixels darkened to 1/4 intensity).
for i in xrange(10):
    cur = X[i].copy()
    # Use floor division: in-place true division (/=) raises TypeError on
    # integer (uint8) image arrays under Python 3 / modern numpy.
    # NOTE(review): Y[i] has shape (H, W, 1), so np.where yields three index
    # arrays and only channel 0 is darkened — confirm this is intended.
    cur[np.where(Y[i])] //= 4
    plt.imshow(X[i])
    plt.show()
    plt.imshow(cur)
    plt.show()



In [10]:
X, Y, filenames, is_ill = extract_RIM_ONE_v2(os.path.join(os.path.dirname(os.getcwd()), 'data', 'RIM-ONE v2'))
len(X), X[0].shape, len(Y), Y[0].shape, len(filenames), len(is_ill)


Out[10]:
(455, (394, 394, 3), 455, (394, 394, 1), 455, 455)

In [12]:
# Visual sanity check for RIM-ONE v2: image vs. image with the disc mask
# overlaid (masked pixels darkened to 1/4 intensity).
for i in xrange(10):
    cur = X[i].copy()
    # Floor division: in-place /= raises TypeError on uint8 arrays in Python 3.
    # NOTE(review): Y[i] is (H, W, 1), so only channel 0 is darkened — confirm.
    cur[np.where(Y[i])] //= 4
    plt.imshow(X[i])
    plt.show()
    plt.imshow(cur)
    plt.show()



In [7]:
X, disc, cup, filenames, is_ill = extract_RIM_ONE_v3(os.path.join(os.path.dirname(os.getcwd()), 'data', 'RIM-ONE v3'))
len(X), X[0].shape, len(disc), disc[0].shape, len(cup), cup[0].shape, \
    len(filenames), len(is_ill)


Out[7]:
(159, (1424, 1424, 3), 159, (1424, 1424, 1), 159, (1424, 1424, 1), 159, 159)

In [8]:
# Visual sanity check for RIM-ONE v3: print the file name, then show the image
# and a copy with the disc mask overlaid (masked pixels darkened to 1/4).
for i in xrange(10):
    print(filenames[i])
    cur = X[i].copy()
    # Floor division: in-place /= raises TypeError on uint8 arrays in Python 3.
    # NOTE(review): disc[i] is (H, W, 1), so only channel 0 is darkened — confirm.
    cur[np.where(disc[i])] //= 4
    plt.imshow(X[i])
    plt.show()
    plt.imshow(cur)
    plt.show()


N-1-L
N-10-R
N-11-L
N-12-R
N-13-L
N-14-R
N-16-R
N-17-L
N-18-R
N-2-R

In [18]:
X, Y, filenames, is_ill = extract_HRF(os.path.join(os.path.dirname(os.getcwd()), 'data', 'HRF'))
len(X), X[0].shape, len(Y), Y[0].shape, len(filenames), len(is_ill)


Out[18]:
(30, (3504, 3504, 3), 30, (3504, 3504), 30, 30)

In [7]:
# Visual sanity check for HRF: image vs. image with the disc mask overlaid.
# Here Y[i] is 2-D (per Out[18] above), so all three channels are darkened.
for i in xrange(10):
    cur = X[i].copy()
    # Floor division: in-place /= raises TypeError on uint8 arrays in Python 3.
    cur[np.where(Y[i])] //= 4
    plt.imshow(X[i])
    plt.show()
    plt.imshow(cur)
    plt.show()



In [23]:
X, disc, cup, file_codes = extract_DRISHTI_GS_train(os.path.join(os.path.dirname(os.getcwd()), 'data', 'DRISHTI-GS'))
print(len(X), len(disc), len(cup), len(file_codes))
print(len(X), X[0].shape, len(disc), disc[0].shape, len(cup), cup[0].shape, len(file_codes))


50 50 50 50
50 (2040, 2040, 3) 50 (2040, 2040, 1) 50 (2040, 2040, 1) 50

In [15]:
# Visual sanity check for DRISHTI-GS (train): disc mask overlay.
for i in xrange(10):
    cur = X[i].copy()
    # Floor division: in-place /= raises TypeError on uint8 arrays in Python 3.
    # NOTE(review): disc[i] is (H, W, 1), so only channel 0 is darkened — confirm.
    cur[np.where(disc[i])] //= 4
    plt.imshow(X[i])
    plt.show()
    plt.imshow(cur)
    plt.show()



In [16]:
# Visual sanity check for DRISHTI-GS (train): cup mask overlay.
for i in xrange(10):
    cur = X[i].copy()
    # Floor division: in-place /= raises TypeError on uint8 arrays in Python 3.
    # NOTE(review): cup[i] is (H, W, 1), so only channel 0 is darkened — confirm.
    cur[np.where(cup[i])] //= 4
    plt.imshow(X[i])
    plt.show()
    plt.imshow(cur)
    plt.show()



In [28]:
X, file_codes = extract_DRISHTI_GS_test(os.path.join(os.path.dirname(os.getcwd()), 'data', 'DRISHTI-GS'))
print(len(X), len(file_codes))
print(len(X), X[0].shape, len(file_codes))


51 51
51 (2040, 2040, 3) 51

In [27]:
for i in xrange(10):
    plt.imshow(X[i])
    plt.show()


Saving original data


In [ ]:
h5f.create_group("DRIONS-DB/orig")
h5f.create_group("RIM-ONE v2/orig")
h5f.create_group("RIM-ONE v3/orig")
h5f.create_group("HRF/orig")

DRIONS-DB


In [5]:
X, Y, file_codes = extract_DRIONS_DB(os.path.join(os.path.dirname(os.getcwd()), 'data', 'DRIONS-DB'), expert=1)
len(X), X[0].shape, len(Y), Y[0].shape, len(file_codes)


Out[5]:
(110, (560, 560, 3), 110, (560, 560, 1), 110)

In [6]:
resolution = get_resolution_DRIONS_DB()
X_ds = h5f.create_dataset("DRIONS-DB/orig/images", 
                          (len(X),) + resolution + (3,), 
                          chunks=(2,) + resolution + (3,), 
                          dtype='u1', compression="gzip")
disc_ds = h5f.create_dataset("DRIONS-DB/orig/disc", 
                             (len(X),) + resolution + (1,),  
                             chunks=(2,) + resolution + (1,), 
                             dtype='u1', compression="gzip")
file_codes_ds = h5f.create_dataset("DRIONS-DB/orig/file_codes", 
                                   (len(X),),  
                                   chunks=True, 
                                   dtype='S3', compression="gzip")

In [7]:
X_ds[:] = np.array(X)
disc_ds[:] = np.array(Y)
file_codes_ds[:] = np.array(file_codes)

In [6]:
# Random 70/30 train/test split over the DRIONS-DB images.
# NOTE(review): no np.random.seed is set, so this split is not reproducible
# across runs — the printed indices below are from one particular run.
train_frac = 0.7

train_idx = np.random.choice(len(X), size=int(train_frac * len(X)), replace=False)
# Test set is the complement of the training indices.
test_idx = np.array(list(set(range(len(X))) - set(train_idx)))
print(train_idx, test_idx)


[ 74  87  58  52  53  26  25  56  98   5  12   2 105  90  54  94  68  73
  47   3  88  78  89  75  40  11  41  19  67  37  18  86  71  91  50 108
 103  39  20  57  31  66   4  48  27  33  44  81  49  24  85  38 107  92
  10  72  61  15 109  59 104  42  65  22  46 101  79  21  97  23   9  60
  93  70  29   7  62] [  0   1   6   8  13  14  16  17  28  30  32  34  35  36  43  45  51  55
  63  64  69  76  77  80  82  83  84  95  96  99 100 102 106]

In [7]:
train_idx_ds = h5f.create_dataset("DRIONS-DB/train_idx", data=train_idx)
test_id_ds = h5f.create_dataset("DRIONS-DB/test_idx", data=test_idx)

In [56]:
#del h5f["DRIONS-DB/orig/images"]
#del h5f["DRIONS-DB/orig/disc"]
#del h5f["DRIONS-DB/orig/file_codes"]

RIM-ONE v2


In [18]:
X, Y, file_codes, is_ill = extract_RIM_ONE_v2(os.path.join(os.path.dirname(os.getcwd()), 'data', 'RIM-ONE v2'))
len(X), X[0].shape, len(Y), Y[0].shape, len(file_codes), len(is_ill)


Out[18]:
(455, (394, 394, 3), 455, (394, 394, 1), 455, 455)

In [12]:
resolution = get_resolution_RIM_ONE_v2()
# Variable-length string dtype for file codes. Use `str`, not `unicode`:
# `unicode` does not exist in Python 3 (NameError), and this notebook already
# shims xrange for Python 3 compatibility.
str_dt = h5py.special_dtype(vlen=str)

# Chunked, gzip-compressed datasets: images (H, W, 3), binary disc masks
# (H, W, 1), per-image file codes and illness flags.
X_ds = h5f.create_dataset("RIM-ONE v2/orig/images", 
                          (len(X),) + resolution + (3,), 
                          chunks=(2,) + resolution + (3,), 
                          dtype='u1', compression="gzip")
disc_ds = h5f.create_dataset("RIM-ONE v2/orig/disc", 
                             (len(X),) + resolution + (1,),  
                             chunks=(2,) + resolution + (1,), 
                             dtype='u1', compression="gzip")
file_codes_ds = h5f.create_dataset("RIM-ONE v2/orig/file_codes", 
                                   (len(X),),  
                                   chunks=True, 
                                   dtype=str_dt, compression="gzip")
is_ill_ds = h5f.create_dataset("RIM-ONE v2/orig/is_ill", 
                               (len(X),),
                               chunks=True, 
                               dtype='u1', compression="gzip")

In [13]:
X_ds[:] = np.array(X)
disc_ds[:] = np.array(Y)
file_codes_ds[:] = np.array(file_codes)
is_ill_ds[:] = np.array(is_ill)

In [19]:
train_frac = 0.7

train_idx = np.random.choice(len(X), size=int(train_frac * len(X)), replace=False)
test_idx = np.array(list(set(range(len(X))) - set(train_idx)))
print(train_idx, test_idx)


[158 338  23 335 157 325 226 216 122 224 315 451 392 350 100 141 396 280
  24 160 214 229 203 283  18 379 267 227 377 374 147 434 281 236  57 409
 218 183 339 143 217 156 197 255  58 366 104 423 134 410 386 435  31 108
   5 441 354 314 210   1 175 130 285 171 360  43 151  10 431 275 244  14
 167 219 163 305 288 372  56  79 107  26 329  60 447 448 368 378 240 172
 370 140 450 161 389 445 401 132 125 153 391 411 239 454 415  71  53 186
 320  37 427  50 387 142 190 230 213 194 333 121 406 128 299  51 443  20
  22 266 154 282 365 195 144  77 123 225 332 106  32 331 278  99 248 135
 205  21 231 312 421  61 196  52 395 222 279 429 166  65 101  93 307 444
  55 388 115 384  28 399 241 296 426 412 363 436  49 289 212  74 173 215
  96 262 136  87  11 110 373  88 418  94 131 111 390  70 223 102  97 246
 220 185 394 342 245 178 114 301 191 439 309  78 235 247 168 351  30 357
 400  98 419 276 323 326 182 202 187 211 127 198 380  38 319 237 318 159
 206 430 352 449  85 398 145 428 347  82 150 177 184 340 302 385 109 359
 233  54   0  40 405  39  64 165 277 265 376 207 446 402  46 179  15  41
 291  89   7 117 417 263 164 253  75 264 286 369 252   8 416 383 438 254
 169 358 361 414 348 116 344 403 306 292 120 422 425 234 270 324   4 152
 272 170 112 176  13  91  16   3 221  29  47 327] [  2   6   9  12  17  19  25  27  33  34  35  36  42  44  45  48  59  62
  63  66  67  68  69  72  73  76  80  81  83  84  86  90  92  95 103 105
 113 118 119 124 126 129 133 137 138 139 146 148 149 155 162 174 180 181
 188 189 192 193 199 200 201 204 208 209 228 232 238 242 243 249 250 251
 256 257 258 259 260 261 268 269 271 273 274 284 287 290 293 294 295 297
 298 300 303 304 308 310 311 313 316 317 321 322 328 330 334 336 337 341
 343 345 346 349 353 355 356 362 364 367 371 375 381 382 393 397 404 407
 408 413 420 424 432 433 437 440 442 452 453]

In [23]:
print(len(train_idx), len(test_idx), len(file_codes))
print(len(train_idx) + len(test_idx) == len(file_codes))


318 137 455
True

In [21]:
train_idx_ds = h5f.create_dataset("RIM-ONE v2/train_idx", data=train_idx)
test_id_ds = h5f.create_dataset("RIM-ONE v2/test_idx", data=test_idx)

RIM-ONE v3


In [8]:
X, disc, cup, file_codes, is_ill = extract_RIM_ONE_v3(os.path.join(os.path.dirname(os.getcwd()), 'data', 'RIM-ONE v3'))
len(X), X[0].shape, len(disc), disc[0].shape, len(cup), cup[0].shape, \
    len(file_codes), len(is_ill)


Out[8]:
(159, (1424, 1424, 3), 159, (1424, 1424, 1), 159, (1424, 1424, 1), 159, 159)

In [9]:
resolution = get_resolution_RIM_ONE_v3()
# Variable-length string dtype. Use `str`, not `unicode`: `unicode` does not
# exist in Python 3 (NameError).
str_dt = h5py.special_dtype(vlen=str)

In [10]:
X_ds = h5f.create_dataset("RIM-ONE v3/orig/images", 
                          (len(X),) + resolution + (3,), 
                          chunks=(2,) + resolution + (3,), 
                          dtype='u1', compression="gzip")
disc_ds = h5f.create_dataset("RIM-ONE v3/orig/disc", 
                             (len(X),) + resolution + (1,),  
                             chunks=(2,) + resolution + (1,), 
                             dtype='u1', compression="gzip")
file_codes_ds = h5f.create_dataset("RIM-ONE v3/orig/file_codes", 
                                   (len(X),),  
                                   chunks=True, 
                                   dtype=str_dt, compression="gzip")
is_ill_ds = h5f.create_dataset("RIM-ONE v3/orig/is_ill", 
                               (len(X),),
                               chunks=True, 
                               dtype='u1', compression="gzip")

In [11]:
X_ds[:] = np.array(X)
disc_ds[:] = np.array(disc)
file_codes_ds[:] = np.array(file_codes)
is_ill_ds[:] = np.array(is_ill)

In [12]:
cup_ds = h5f.create_dataset("RIM-ONE v3/orig/cup", 
                            (len(X),) + resolution + (1,),  
                            chunks=(2,) + resolution + (1,), 
                            dtype='u1', compression="gzip")

In [13]:
cup_ds[:] = np.array(cup)

In [14]:
# Load the DRIU reference results to determine which images form the test set.
# NOTE(review): `imh` is not imported anywhere visible in this notebook —
# presumably it comes from `from extract_data import *`; confirm.
driu_imgs, driu_filenames = imh.load_set(os.path.join(os.path.dirname(os.getcwd()), 'data', 'RIM-ONE v3', 'DRIU results', 'DRIU'))
# Strip directory and extension to get the bare file codes. Materialize as a
# list: on Python 3, map() returns a one-shot iterator that would be silently
# exhausted after a single pass in a later cell.
driu_filecodes = [s[s.rfind('/') + 1:s.rfind('.')] for s in driu_filenames]

In [15]:
test_idx = [file_codes.index(el) for el in driu_filecodes]
train_idx = list(set(range(len(file_codes))) - set(test_idx))
len(train_idx), len(test_idx), len(file_codes)


Out[15]:
(99, 60, 159)

In [16]:
train_idx_ds = h5f.create_dataset("RIM-ONE v3/train_idx_driu", data=train_idx)
test_idx_ds = h5f.create_dataset("RIM-ONE v3/test_idx_driu", data=test_idx)

HRF


In [24]:
X, Y, file_codes, is_ill = extract_HRF(os.path.join(os.path.dirname(os.getcwd()), 'data', 'HRF'))
len(X), X[0].shape, len(Y), Y[0].shape, len(file_codes), len(is_ill)


Out[24]:
(30, (3504, 3504, 3), 30, (3504, 3504, 1), 30, 30)

In [22]:
resolution = get_resolution_HRF()
# Variable-length string dtype for file codes. Use `str`, not `unicode`:
# `unicode` does not exist in Python 3 (NameError).
str_dt = h5py.special_dtype(vlen=str)

# Chunked, gzip-compressed datasets for the HRF originals: images, disc masks,
# file codes, and illness flags.
X_ds = h5f.create_dataset("HRF/orig/images", 
                          (len(X),) + resolution + (3,), 
                          chunks=(2,) + resolution + (3,), 
                          dtype='u1', compression="gzip")
disc_ds = h5f.create_dataset("HRF/orig/disc", 
                             (len(X),) + resolution + (1,),  
                             chunks=(2,) + resolution + (1,), 
                             dtype='u1', compression="gzip")
file_codes_ds = h5f.create_dataset("HRF/orig/file_codes", 
                                   (len(X),),  
                                   chunks=True, 
                                   dtype=str_dt, compression="gzip")
is_ill_ds = h5f.create_dataset("HRF/orig/is_ill", 
                               (len(X),),
                               chunks=True, 
                               dtype='u1', compression="gzip")

In [23]:
X_ds[:] = np.array(X)
disc_ds[:] = np.array(Y)
file_codes_ds[:] = np.array(file_codes)
is_ill_ds[:] = np.array(is_ill)

In [25]:
train_frac = 0.7

train_idx = np.random.choice(len(X), size=int(train_frac * len(X)), replace=False)
test_idx = np.array(list(set(range(len(X))) - set(train_idx)))
print(train_idx, test_idx)


[18 19 28  1 21  4  7 11 10  9  0  6 22 16 27 14  8 20 25  5 13] [ 2  3 12 15 17 23 24 26 29]

In [26]:
print(len(train_idx), len(test_idx), len(file_codes))
print(len(train_idx) + len(test_idx) == len(file_codes))


21 9 30
True

In [27]:
train_idx_ds = h5f.create_dataset("HRF/train_idx", data=train_idx)
test_idx_ds = h5f.create_dataset("HRF/test_idx", data=test_idx)

DRISHTI-GS (train)


In [29]:
X, disc, cup, file_codes = extract_DRISHTI_GS_train(os.path.join(os.path.dirname(os.getcwd()), 'data', 'DRISHTI_GS'))
len(X), X[0].shape, len(disc), disc[0].shape, len(cup), cup[0].shape, len(file_codes)


Out[29]:
(50, (2040, 2040, 3), 50, (2040, 2040, 1), 50, (2040, 2040, 1), 50)

In [30]:
resolution = get_resolution_DRISHTI_GS()
# Variable-length string dtype. Use `str`, not `unicode`: `unicode` does not
# exist in Python 3 (NameError).
str_dt = h5py.special_dtype(vlen=str)

In [13]:
X_ds = h5f.create_dataset("DRISHTI-GS/orig/images", 
                          (len(X),) + resolution + (3,), 
                          chunks=(2,) + resolution + (3,), 
                          dtype='u1', compression="gzip")
disc_ds = h5f.create_dataset("DRISHTI-GS/orig/disc", 
                             (len(X),) + resolution + (1,),  
                             chunks=(2,) + resolution + (1,), 
                             dtype='u1', compression="gzip")
cup_ds = h5f.create_dataset("DRISHTI-GS/orig/cup", 
                             (len(X),) + resolution + (1,),  
                             chunks=(2,) + resolution + (1,), 
                             dtype='u1', compression="gzip")
file_codes_ds = h5f.create_dataset("DRISHTI-GS/orig/file_codes", 
                                   (len(X),),  
                                   chunks=True, 
                                   dtype=str_dt, compression="gzip")

In [14]:
X_ds[:] = np.array(X)
disc_ds[:] = np.array(disc)
cup_ds[:] = np.array(cup)
file_codes_ds[:] = np.array(file_codes)

In [31]:
train_frac = 0.7

train_idx = np.random.choice(len(X), size=int(train_frac * len(X)), replace=False)
test_idx = np.array(list(set(range(len(X))) - set(train_idx)))
print(train_idx, test_idx)


[29 24 46 23 25 19  2 42 20 38 28  7 45  8 17 32 18  1 39  4 13 41 33 31 43
 22  3 36 48  0 12 30  5 16 10] [34 35 37  6 47 40  9 11 44 14 15 49 21 26 27]

In [32]:
print(len(train_idx), len(test_idx), len(file_codes))
print(len(train_idx) + len(test_idx) == len(file_codes))


35 15 50
True

In [33]:
train_idx_ds = h5f.create_dataset("DRISHTI-GS/train_idx", data=train_idx)
test_idx_ds = h5f.create_dataset("DRISHTI-GS/test_idx", data=test_idx)

DRISHTI-GS (test)


In [16]:
X, file_codes = extract_DRISHTI_GS_test(os.path.join(os.path.dirname(os.getcwd()), 'data', 'DRISHTI_GS'))
len(X), X[0].shape, len(file_codes)


Out[16]:
(51, (2040, 2040, 3), 51)

In [17]:
resolution = get_resolution_DRISHTI_GS()
# Variable-length string dtype. Use `str`, not `unicode`: `unicode` does not
# exist in Python 3 (NameError).
str_dt = h5py.special_dtype(vlen=str)

In [18]:
X_ds = h5f.create_dataset("DRISHTI-GS-test/orig/images", 
                          (len(X),) + resolution + (3,), 
                          chunks=(2,) + resolution + (3,), 
                          dtype='u1', compression="gzip")
file_codes_ds = h5f.create_dataset("DRISHTI-GS-test/orig/file_codes", 
                                   (len(X),),  
                                   chunks=True, 
                                   dtype=str_dt, compression="gzip")

In [19]:
X_ds[:] = np.array(X)
file_codes_ds[:] = np.array(file_codes)

Saving resized data


In [20]:
resolutions = (512, 256, 128)
#db_names = ('DRIONS-DB', 'RIM-ONE v2', 'RIM-ONE v3', 'HRF', 'DRISHTI-GS')
db_names = ('DRISHTI-GS',)

In [21]:
for res in resolutions:
    h5f.create_group("DRIONS-DB/{} px".format(res))
    h5f.create_group("RIM-ONE v2/{} px".format(res))
    h5f.create_group("RIM-ONE v3/{} px".format(res))
    h5f.create_group("HRF/{} px".format(res))
    h5f.create_group("DRISHTI-GS/{} px".format(res))

In [22]:
# For each selected database, create downsampled copies of images, disc masks,
# file codes, and (where present) illness flags at each target resolution.
for db in db_names:
    X_ds = h5f['{}/orig/images'.format(db)]
    disc_ds = h5f['{}/orig/disc'.format(db)]
    file_codes_ds = h5f['{}/orig/file_codes'.format(db)]
    # DRIONS-DB and DRISHTI-GS have no is_ill labels (see dataset creation above).
    has_is_ill = db != 'DRIONS-DB' and db != 'DRISHTI-GS'
    if has_is_ill:
        is_ill_ds = h5f['{}/orig/is_ill'.format(db)]

    # Variable-length string dtype, hoisted out of the resolution loop.
    # Use `str`, not `unicode`: `unicode` does not exist in Python 3 (NameError).
    str_dt = h5py.special_dtype(vlen=str)

    for res in resolutions:
        print('Copying data for {}, resolution {}'.format(db, res))
        X_small_ds = h5f.create_dataset("{}/{} px/images".format(db, res), 
                                        (X_ds.shape[0], res, res, 3,), 
                                        chunks=(2, res, res, 3,), 
                                        dtype='u1', compression="gzip")
        disc_small_ds = h5f.create_dataset("{}/{} px/disc".format(db, res), 
                                           (X_ds.shape[0], res, res, 1,),  
                                           chunks=(2, res, res, 1,), 
                                           dtype='u1', compression="gzip")
        file_codes_small_ds = h5f.create_dataset("{}/{} px/file_codes".format(db, res), 
                                                 (X_ds.shape[0],),  
                                                 chunks=True, 
                                                 dtype=str_dt, compression="gzip")
        
        # NOTE(review): scipy.misc.imresize was removed in SciPy 1.3; if a
        # modern SciPy is in use, switch to cv2.resize or
        # skimage.transform.resize — confirm the installed version.
        for i in tqdm_notebook(xrange(X_ds.shape[0]), leave=True):
            X_small_ds[i] = sp.misc.imresize(X_ds[i], size=(res, res))
            # Masks are (H, W, 1); resize the single channel, then restore it.
            cur_disc_small = sp.misc.imresize(disc_ds[i][..., 0], size=(res, res))
            disc_small_ds[i] = cur_disc_small[..., np.newaxis]
        file_codes_small_ds[:] = file_codes_ds[:]
        
        if has_is_ill:
            is_ill_small_ds = h5f.create_dataset("{}/{} px/is_ill".format(db, res), 
                                                 (X_ds.shape[0],),
                                                 chunks=True, 
                                                 dtype='u1', compression="gzip")
            is_ill_small_ds[:] = is_ill_ds


Copying data for DRISHTI-GS, resolution 512

Copying data for DRISHTI-GS, resolution 256

Copying data for DRISHTI-GS, resolution 128


In [23]:
# Copying cup for RIM-ONE v3 and DRISHTI-GS

# The two databases that ship cup annotations also get downsampled cup masks
# at each target resolution, mirroring the disc-mask layout above.
for db in ('RIM-ONE v3', 'DRISHTI-GS'):
    images = h5f['{}/orig/images'.format(db)]
    cup_full = h5f['{}/orig/cup'.format(db)]
    n_images = images.shape[0]

    for res in resolutions:
        print('Copying cup for {}, resolution {}'.format(db, res))
        cup_resized_ds = h5f.create_dataset("{}/{} px/cup".format(db, res),
                                            (n_images, res, res, 1),
                                            chunks=(2, res, res, 1),
                                            dtype='u1', compression="gzip")
        for idx in tqdm_notebook(xrange(n_images), leave=True):
            # Masks are (H, W, 1): resize the single channel, then restore it.
            resized = sp.misc.imresize(cup_full[idx][..., 0], size=(res, res))
            cup_resized_ds[idx] = resized[..., np.newaxis]


Copying cup for DRISHTI-GS, resolution 512

Copying cup for DRISHTI-GS, resolution 256

Copying cup for DRISHTI-GS, resolution 128


In [20]:
# Finding disc bounding box on RIM-ONE v3 512px, saving its coordinates,
# and saving bounding box resized to standard resolution

# Padding (in pixels) added on every side of the detected disc bounding box.
gap = 20
# Target size of the cropped disc region.
standard_res = (256, 256)

print('Saving disc bbox coordinates for RIM-ONE v3')
X_ds = h5f['RIM-ONE v3/512 px/images']
disc_ds = h5f['RIM-ONE v3/512 px/disc']
cup_ds = h5f['RIM-ONE v3/512 px/cup']
disc_locations = np.empty((X_ds.shape[0], 4), dtype=np.int64)
# disc_locations[i] stores (min_i, min_j, max_i, max_j)
cup_cropped = np.empty((X_ds.shape[0],) + standard_res + (1,), dtype=np.uint8)
img_cropped = np.empty((X_ds.shape[0],) + standard_res + (3,), dtype=np.uint8)

for i in tqdm_notebook(xrange(X_ds.shape[0]), leave=True):
    img = X_ds[i]
    disc = disc_ds[i, ..., 0]
    cup = cup_ds[i, ..., 0]

    # Take the largest connected component of the disc mask as "the disc".
    labeled = skimage.measure.label(disc)
    region_props = skimage.measure.regionprops(labeled)
    component = max(region_props, key=attrgetter('area'))    # there should be only 1 component, 
                                                             # so this is a safety measure
    # Expand the component's bbox by `gap` on each side, clamped to the image.
    # NOTE(review): skimage's bbox max_row/max_col are *exclusive*, so clamping
    # to shape - 1 can trim one pixel at the image border — confirm intended.
    disc_locations[i][0] = max(component.bbox[0] - gap, 0)
    disc_locations[i][1] = max(component.bbox[1] - gap, 0)
    disc_locations[i][2] = min(component.bbox[2] + gap, img.shape[0] - 1)
    disc_locations[i][3] = min(component.bbox[3] + gap, img.shape[1] - 1)
    
    # Crop cup mask and image to the padded disc bbox, resize to standard_res.
    cup_cur_cropped = cup[disc_locations[i][0]:disc_locations[i][2],
                           disc_locations[i][1]:disc_locations[i][3]]
    cup_cur_cropped = sp.misc.imresize(cup_cur_cropped, standard_res)
    cup_cropped[i, ..., 0] = cup_cur_cropped
    img_cur_cropped = img[disc_locations[i][0]:disc_locations[i][2],
                           disc_locations[i][1]:disc_locations[i][3]]
    img_cur_cropped = sp.misc.imresize(img_cur_cropped, standard_res)
    img_cropped[i] = img_cur_cropped
    

# Persist only the bbox coordinates; the cropped arrays stay in memory.
h5f['RIM-ONE v3/512 px/disc_locations'] = disc_locations
#h5f['RIM-ONE v3/512 px/cup_cropped'] = cup_cropped
#h5f['RIM-ONE v3/512 px/img_cropped'] = img_cropped


Saving disc bbox coordinates for RIM-ONE v3


In [36]:
plt.imshow(img_cropped[0])


Out[36]:
<matplotlib.image.AxesImage at 0x7f3e7efe4a10>

In [34]:
h5f.close()

In [ ]: